runtime.mspan.base (method)

85 uses
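
base returns the address of the first byte of memory managed by the span. Per the definition at mheap.go#L491 (listed below), in current Go sources it is a one-line accessor over the span's start address; call sites combine it with elemsize and npages to derive object addresses, offsets into the span, and the span's limit.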

	runtime (current package)
		arena.go#L554: 	base := s.base()
		arena.go#L612: 	offset := addr - s.base()
		arena.go#L677: 	offset := addr - s.base()
		arena.go#L779: 	x := unsafe.Pointer(span.base())
		arena.go#L786: 		gcmarknewobject(span, span.base())
		arena.go#L791: 		racemalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L796: 		msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L804: 		rzStart := span.base() + span.elemsize
		arena.go#L805: 		span.userArenaChunkFree = makeAddrRange(span.base(), rzStart)
		arena.go#L807: 		asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L819: 			profilealloc(mp, unsafe.Pointer(span.base()), userArenaChunkBytes)
		arena.go#L892: 	sysFault(unsafe.Pointer(s.base()), s.npages*pageSize)
		arena.go#L921: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L959: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L962: 		msanfree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L965: 		asanpoison(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L1015: 		base = s.base()
		arena.go#L1066: 	s.limit = s.base() + s.elemsize
		arena.go#L1102: 	memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize)
		heapdump.go#L462: 				p := unsafe.Pointer(s.base() + uintptr(spf.special.offset))
		heapdump.go#L484: 		p := s.base()
		heapdump.go#L661: 			p := s.base() + uintptr(spp.special.offset)
		malloc.go#L920: 			return gclinkptr(uintptr(result)*s.elemsize + s.base())
		malloc.go#L956: 	v = gclinkptr(uintptr(freeIndex)*s.elemsize + s.base())
		malloc.go#L1181: 		x = unsafe.Pointer(span.base())
		mbitmap.go#L373: 	return span.base() + span.objIndex(addr)*span.elemsize
		mbitmap.go#L442: 	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		mbitmap.go#L579: 		return heapBitsSlice(span.base(), pageSize)
		mbitmap.go#L581: 	return heapBitsSlice(span.base(), span.npages*pageSize)
		mbitmap.go#L604: 	hbits := (*byte)(unsafe.Pointer(span.base() + spanSize - bitmapSize))
		mbitmap.go#L614: 	i := (addr - span.base()) / goarch.PtrSize / ptrBits
		mbitmap.go#L615: 	j := (addr - span.base()) / goarch.PtrSize % ptrBits
		mbitmap.go#L662: 	o := (x - span.base()) / goarch.PtrSize
		mbitmap.go#L730: 				memclrNoHeapPointers(unsafe.Pointer(progSpan.base()), progSpan.npages*pageSize)
		mbitmap.go#L736: 			gctyp = (*_type)(unsafe.Pointer(progSpan.base()))
		mbitmap.go#L739: 			gctyp.GCData = (*byte)(add(unsafe.Pointer(progSpan.base()), heapBitsOff))
		mbitmap.go#L1152: 	return s.divideByElemSize(p - s.base())
		mbitmap.go#L1239: 		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
		mbitmap.go#L1291: 	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		mbitmap.go#L1305: 	base = s.base() + objIndex*s.elemsize
		mcache.go#L254: 	s.limit = s.base() + size
		mcentral.go#L262: 	s.limit = s.base() + size*n
		mgcmark.go#L389: 					p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
		mgcmark.go#L1410: 		if b == s.base() {
		mgcmark.go#L1416: 			for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
		mgcmark.go#L1426: 		n = s.base() + s.elemsize - b
		mgcmark.go#L1428: 		tp = s.typePointersOfUnchecked(s.base())
		mgcmark.go#L1563: 		obj := span.base() + idx*span.elemsize
		mgcmark.go#L1615: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcmark.go#L1648: 	print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
		mgcmark.go#L1703: 	arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcsweep.go#L551: 		p := s.base() + objIndex*size
		mgcsweep.go#L557: 			endOffset := p - s.base() + size
		mgcsweep.go#L574: 					p := s.base() + uintptr(special.offset)
		mgcsweep.go#L590: 					p := s.base() + uintptr(special.offset)
		mgcsweep.go#L617: 				x := s.base() + i*s.elemsize
		mgcsweep.go#L817: 				sysFault(unsafe.Pointer(s.base()), size)
		mgcsweep.go#L862: 		addr := s.base() + i*s.elemsize
		mgcwork.go#L391: 			newb := (*workbuf)(unsafe.Pointer(s.base() + i))
		mheap.go#L491: func (s *mspan) base() uintptr {
		mheap.go#L647: 	if s == nil || b < s.base() {
		mheap.go#L722: 	if s == nil || s.state.get() != mSpanInUse || p < s.base() || p >= s.limit {
		mheap.go#L1393: 		s.limit = s.base() + s.npages*pageSize
		mheap.go#L1446: 	h.setSpans(s.base(), npages, s)
		mheap.go#L1454: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1569: 			base := unsafe.Pointer(s.base())
		mheap.go#L1575: 			base := unsafe.Pointer(s.base())
		mheap.go#L1624: 			print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
		mheap.go#L1630: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1659: 	h.pages.free(s.base(), s.npages)
		mheap.go#L1838: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1839: 	ai := arenaIndex(s.base())
		mheap.go#L1846: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1847: 	ai := arenaIndex(s.base())
		mheap.go#L1870: 	offset := uintptr(p) - span.base()
		mheap.go#L1905: 	offset := uintptr(p) - span.base()
		mheap.go#L2221: 	offset := uintptr(p) - span.base()
		mwbbuf.go#L253: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		signal_unix.go#L410: 			if s != nil && s.state.get() == mSpanManual && s.base() < sp && sp < s.limit {
		signal_unix.go#L411: 				gp := *(**g)(unsafe.Pointer(s.base()))
		stack.go#L212: 			x := gclinkptr(s.base() + i)
		stack.go#L415: 		v = unsafe.Pointer(s.base())
		stack.go#L505: 			println(hex(s.base()), v)
		traceallocfree.go#L69: 				x := s.base() + i*s.elemsize
		traceallocfree.go#L107: 	return traceArg(uint64(s.base())-trace.minPageHeapAddr) / pageSize
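
Nearly every call site above follows one of three idioms: offsetting into the span (base() + i*elemsize), bounding it (base() + npages*pageSize), or mapping an address back to an offset or index (addr - base()). The following is a minimal, self-contained sketch of those idioms; the span type here is a hypothetical, pared-down stand-in for runtime.mspan, not the real struct.

	package main

	import "fmt"

	// pageSize mirrors the runtime's 8 KiB page size (an assumption here;
	// the real constant is internal to package runtime).
	const pageSize = 8192

	// span is a hypothetical, pared-down stand-in for runtime.mspan; the
	// real struct (runtime/mheap.go) has many more fields.
	type span struct {
		startAddr uintptr // address of the first byte the span manages
		npages    uintptr // number of pages spanned
		elemsize  uintptr // size in bytes of each object slot
	}

	// base mirrors runtime.(*mspan).base: it returns the span's start address.
	func (s *span) base() uintptr { return s.startAddr }

	func main() {
		s := &span{startAddr: 0xc0000000, npages: 1, elemsize: 64}

		limit := s.base() + s.npages*pageSize // span end, cf. mheap.go#L1393
		obj := s.base() + 3*s.elemsize        // address of object 3, cf. mgcsweep.go#L862
		idx := (obj - s.base()) / s.elemsize  // address back to index, cf. mbitmap.go#L1152

		fmt.Printf("base=%#x limit=%#x obj=%#x idx=%d\n", s.base(), limit, obj, idx)
	}

Because base() is the sole accessor for the span's start, the arithmetic above stays valid regardless of how the runtime allocates or reuses the underlying pages.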